Show the code

targets::tar_visnetwork(targets_only =T)

Predictive Models for BoardGameGeek Ratings
# Display the targets pipeline dependency graph (targets only, no functions).
targets::tar_visnetwork(targets_only = TRUE)

# Versioned pins board holding the trained vetiver model objects.
model_board <- pins::board_folder(
  "models",
  versioned = TRUE
)
# Read the pinned vetiver fits for each BGG outcome from the models board.
averageweight_fit <-
  vetiver_pin_read(
    model_board,
    "bgg_averageweight"
  )

average_fit <-
  vetiver_pin_read(
    model_board,
    "bgg_average"
  )

usersrated_fit <-
  vetiver_pin_read(
    model_board,
    "bgg_usersrated"
  )

valid_predictions |>
pivot_outcomes() |>
left_join(
games |>
bggUtils:::unnest_outcomes() |>
select(game_id, usersrated),
by = join_by(game_id)
) |>
plot_predictions(alpha = usersrated)+
theme(legend.title = element_text())targets_tracking_details(metrics = valid_metrics,
details = details) |>
select(model, minratings, outcome, any_of(c("rmse", "mae", "mape", "rsq", "ccc"))) |>
filter(minratings == 25) |>
select(minratings, everything()) |>
gt::gt() |>
gt::tab_options(quarto.disable_processing = T) |>
gtExtras::gt_theme_espn()

| minratings | model | outcome | rmse | mae | mape | rsq | ccc |
|---|---|---|---|---|---|---|---|
| 25 | lightgbm | average | 0.692 | 0.509 | 7.582 | 0.282 | 0.463 |
| 25 | lightgbm | averageweight | 0.457 | 0.347 | 19.210 | 0.665 | 0.804 |
| 25 | lightgbm+lightgbm | bayesaverage | 0.293 | 0.170 | 2.823 | 0.432 | 0.646 |
| 25 | lightgbm | usersrated | 1565.092 | 461.642 | 184.381 | 0.229 | 0.476 |
# Feature-importance plots for each of the three pinned model fits.
average_plot <-
  average_fit |>
  extract_vetiver_features() |>
  plot_model_features() +
  labs(title = "Average Rating")

averageweight_plot <-
  averageweight_fit |>
  extract_vetiver_features() |>
  plot_model_features() +
  labs(title = "Average Weight")

usersrated_plot <-
  usersrated_fit |>
  extract_vetiver_features() |>
  plot_model_features() +
  labs(title = "Users Rated")

predictions <-
upcoming_games |>
impute_averageweight(
model = averageweight_fit
) |>
predict_bayesaverage(
average_model = average_fit,
usersrated_model = usersrated_fit
)
# Interactive table of predictions for recent releases.
predictions |>
  filter(yearpublished >= 2024) |>
  # exclude a known problematic outlier title (game_id 388225)
  filter(game_id != 388225) |>
  predictions_dt(games = games) |>
  add_colors()